runtime.mspan.elemsize (field)

91 uses

	runtime (current package)
		arena.go#L791: 		racemalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L796: 		msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L801: 		rzSize := computeRZlog(span.elemsize)
		arena.go#L802: 		span.elemsize -= rzSize
		arena.go#L803: 		span.largeType.Size_ = span.elemsize
		arena.go#L804: 		rzStart := span.base() + span.elemsize
		arena.go#L807: 		asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L903: 	gcController.totalFree.Add(int64(s.elemsize))
		arena.go#L913: 	atomic.Xadd64(&stats.largeFree, int64(s.elemsize))
		arena.go#L917: 	gcController.update(-int64(s.elemsize), 0)
		arena.go#L921: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L959: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L962: 		msanfree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L965: 		asanpoison(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L1065: 	s.elemsize -= userArenaChunkReserveBytes()
		arena.go#L1066: 	s.limit = s.base() + s.elemsize
		arena.go#L1079: 	atomic.Xadd64(&stats.largeAlloc, int64(s.elemsize))
		arena.go#L1084: 	gcController.totalAlloc.Add(int64(s.elemsize))
		arena.go#L1087: 	gcController.update(int64(s.elemsize), 0)
		arena.go#L1102: 	memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L1108: 	s.userArenaChunkFree = makeAddrRange(base, base+s.elemsize)
		arena.go#L1119: 	s.largeType.Size_ = s.elemsize
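
The arena.go entries above share one recurring move: shrink elemsize to carve a reserved tail off the chunk (an ASan red zone at #L801-L804, the chunk reserve at #L1065-L1066), then keep limit and largeType.Size_ in step with the new size. A minimal sketch of that bookkeeping, using a toy span type rather than the runtime's mspan:

	package main

	import "fmt"

	// Toy stand-in for the mspan fields these call sites touch.
	type span struct {
		start    uintptr // what span.base() returns in the runtime
		elemsize uintptr
		limit    uintptr
	}

	// trimTail mirrors the arena.go pattern: drop `reserve` bytes off
	// the element size and keep limit consistent, so the rest of the
	// runtime never treats the tail as part of the object.
	func (s *span) trimTail(reserve uintptr) (tailStart uintptr) {
		s.elemsize -= reserve
		s.limit = s.start + s.elemsize
		return s.start + s.elemsize // first byte of the reserved tail
	}

	func main() {
		s := &span{start: 0x1000, elemsize: 8192, limit: 0x1000 + 8192}
		tail := s.trimTail(512) // 512 is an arbitrary example reserve
		fmt.Printf("elemsize=%d limit=%#x tail=%#x\n", s.elemsize, s.limit, tail)
	}
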
		cgocall.go#L706: 			if tp, addr = tp.next(base + span.elemsize); addr == 0 {
		heapdump.go#L485: 		size := s.elemsize
		malloc.go#L920: 			return gclinkptr(uintptr(result)*s.elemsize + s.base())
		malloc.go#L956: 	v = gclinkptr(uintptr(freeIndex)*s.elemsize + s.base())
		malloc.go#L1180: 		size = span.elemsize
		malloc.go#L1244: 	fullSize := span.elemsize
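
malloc.go turns a free-slot index into an address with base + index*elemsize (#L920, #L956). A self-contained sketch of that arithmetic; the span layout here is illustrative, not the allocator's:

	package main

	import "fmt"

	type span struct {
		start    uintptr
		elemsize uintptr
		nelems   uintptr
	}

	// slotAddr is the malloc.go pattern: the i'th object in a span
	// starts i element sizes past the span's base.
	func (s *span) slotAddr(i uintptr) uintptr {
		if i >= s.nelems {
			panic("slot index out of range")
		}
		return s.start + i*s.elemsize
	}

	func main() {
		// Example: a 48-byte size class in an 8KB span holds 170 slots.
		s := &span{start: 0x1000, elemsize: 48, nelems: 170}
		fmt.Printf("slot 0 at %#x, slot 3 at %#x\n", s.slotAddr(0), s.slotAddr(3))
	}
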
		mbitmap.go#L157: 	if base == addr && size == span.elemsize {
		mbitmap.go#L182: 	if heapBitsInSpan(span.elemsize) {
		mbitmap.go#L373: 	return span.base() + span.objIndex(addr)*span.elemsize
		mbitmap.go#L543: 	if (!s.spanclass.noscan() && heapBitsInSpan(s.elemsize)) || s.isUserArenaChunk {
		mbitmap.go#L570: 		if span.elemsize > minSizeForMallocHeader {
		mbitmap.go#L616: 	bits := span.elemsize / goarch.PtrSize
		mbitmap.go#L646: 	bits := span.elemsize / goarch.PtrSize
		mbitmap.go#L710: 		if doubleCheck && (!heapBitsInSpan(dataSize) || !heapBitsInSpan(span.elemsize)) {
		mbitmap.go#L748: 		scanSize = span.elemsize
		mbitmap.go#L757: 		maxIterBytes := span.elemsize
		mbitmap.go#L785: 	maxIterBytes := span.elemsize
		mbitmap.go#L793: 		if i < span.elemsize {
		mbitmap.go#L802: 			tp, addr = tp.next(x + span.elemsize)
		mbitmap.go#L814: 		tp, addr = tp.next(x + span.elemsize)
		mbitmap.go#L821: 	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, "\n")
		mbitmap.go#L823: 	print("runtime: limit=", hex(x+span.elemsize), "\n")
		mbitmap.go#L828: 		if tp, addr = tp.next(x + span.elemsize); addr == 0 {
		mbitmap.go#L850: 		if i < span.elemsize {
		mbitmap.go#L879: 	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, " interior=", hex(interior), " size=", size, "\n")
		mbitmap.go#L1141: 	if doubleCheck && q != n/s.elemsize {
		mbitmap.go#L1142: 		println(n, "/", s.elemsize, "should be", n/s.elemsize, "but got", q)
		mbitmap.go#L1305: 	base = s.base() + objIndex*s.elemsize
		mbitmap.go#L1793: 		limit := base + s.elemsize
		mbitmap.go#L1814: 		for i := limit; i < s.elemsize; i++ {
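
mbitmap.go goes the other direction: from an arbitrary interior pointer it recovers the object's index and base address (#L373), and the optimized divide behind objIndex is double-checked against a plain n/elemsize at #L1141-L1142. The same rounding shows up at mgcmark.go#L389 and throughout pinner.go. A sketch of the round trip with a toy span:

	package main

	import "fmt"

	type span struct {
		start    uintptr
		elemsize uintptr
	}

	// objIndex mirrors mspan.objIndex: which slot does addr fall in?
	// (The runtime replaces this division with a precomputed magic
	// multiply; mbitmap.go#L1141 double-checks it against plain /.)
	func (s *span) objIndex(addr uintptr) uintptr {
		return (addr - s.start) / s.elemsize
	}

	// objBase mirrors mbitmap.go#L373: round an interior pointer
	// down to the start of the object that contains it.
	func (s *span) objBase(addr uintptr) uintptr {
		return s.start + s.objIndex(addr)*s.elemsize
	}

	func main() {
		s := &span{start: 0x1000, elemsize: 96}
		interior := s.start + 2*s.elemsize + 17 // somewhere inside slot 2
		fmt.Printf("index=%d base=%#x\n", s.objIndex(interior), s.objBase(interior))
	}
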
		mcache.go#L174: 		bytesAllocated := slotsUsed * int64(s.elemsize)
		mcache.go#L211: 	usedBytes := uintptr(s.allocCount) * s.elemsize
		mcache.go#L279: 			gcController.totalAlloc.Add(slotsUsed * int64(s.elemsize))
		mcache.go#L287: 				dHeapLive -= int64(s.nelems-s.allocCount) * int64(s.elemsize)
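
mcache.go never walks a span to account for memory: byte counts fall straight out of a multiply by elemsize, whether for bytes handed out (#L174, #L211) or for the still-free slots subtracted from the live-heap estimate (#L287). A sketch of both quantities (field types follow mspan, the rest is illustrative):

	package main

	import "fmt"

	type span struct {
		elemsize   uintptr
		nelems     uint16
		allocCount uint16
	}

	// usedBytes is the mcache.go#L211 quantity: bytes handed out.
	func (s *span) usedBytes() uintptr {
		return uintptr(s.allocCount) * s.elemsize
	}

	// unusedBytes is what mcache.go#L287 subtracts from the
	// live-heap estimate: slots still free, in bytes.
	func (s *span) unusedBytes() uintptr {
		return uintptr(s.nelems-s.allocCount) * s.elemsize
	}

	func main() {
		s := &span{elemsize: 48, nelems: 170, allocCount: 42}
		fmt.Println("used:", s.usedBytes(), "free:", s.unusedBytes())
	}
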
		mfinal.go#L446: 	if !span.spanclass.noscan() && !heapBitsInSpan(span.elemsize) && span.spanclass.sizeclass() != 0 {
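
mfinal.go#L446, like mbitmap.go#L182 and #L543 above, gates on heapBitsInSpan(elemsize): small objects keep their pointer bitmaps inline at the end of the span, while larger ones carry a malloc header instead. A sketch of the predicate; the 512-byte cutoff shown matches 64-bit targets, where the runtime's minSizeForMallocHeader is goarch.PtrSize * ptrBits:

	package main

	import "fmt"

	// 8 * 64 = 512 bytes on 64-bit targets: objects at or below this
	// size share an inline bitmap at the span's end instead of a
	// per-object malloc header. (Assumed value, not quoted from the
	// listing above.)
	const minSizeForMallocHeader = 8 * 64

	func heapBitsInSpan(elemsize uintptr) bool {
		return elemsize <= minSizeForMallocHeader
	}

	func main() {
		for _, size := range []uintptr{48, 512, 1024} {
			fmt.Println(size, "bytes -> inline heap bits:", heapBitsInSpan(size))
		}
	}
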
		mgcmark.go#L389: 					p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
		mgcmark.go#L1396: 	n := s.elemsize
		mgcmark.go#L1416: 			for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
		mgcmark.go#L1426: 		n = s.base() + s.elemsize - b
		mgcmark.go#L1563: 		obj := span.base() + idx*span.elemsize
		mgcmark.go#L1623: 			gcw.bytesMarked += uint64(span.elemsize)
		mgcmark.go#L1648: 	print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
		mgcmark.go#L1656: 	size := s.elemsize
		mgcmark.go#L1709: 	gcw.bytesMarked += uint64(span.elemsize)
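
In mgcmark.go, elemsize bounds how far the scanner may read: a large object is split into fixed-size oblets (#L1416) and the length of the final piece is clamped to s.base()+s.elemsize (#L1426). A sketch of the splitting; the 128KB oblet size is the runtime's usual constant, hard-coded here for the example:

	package main

	import "fmt"

	const maxObletBytes = 128 << 10 // assumed oblet size for the example

	type span struct {
		start    uintptr
		elemsize uintptr
	}

	// oblets yields the (start, length) pieces the scanner would walk
	// for the object at b, clamping the last piece at base+elemsize.
	func (s *span) oblets(b uintptr) (pieces [][2]uintptr) {
		end := s.start + s.elemsize
		for p := b; p < end; p += maxObletBytes {
			n := uintptr(maxObletBytes)
			if p+n > end {
				n = end - p // final oblet: clamp to the object's end
			}
			pieces = append(pieces, [2]uintptr{p, n})
		}
		return pieces
	}

	func main() {
		s := &span{start: 0x100000, elemsize: 300 << 10} // one 300KB object
		for _, ob := range s.oblets(s.start) {
			fmt.Printf("scan %#x..%#x (%d bytes)\n", ob[0], ob[0]+ob[1], ob[1])
		}
	}
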
		mgcsweep.go#L528: 	size := s.elemsize
		mgcsweep.go#L617: 				x := s.base() + i*s.elemsize
		mgcsweep.go#L677: 		getg().m.p.ptr().trace.reclaimed += uintptr(nfreed) * s.elemsize
		mgcsweep.go#L763: 			gcController.totalFree.Add(int64(nfreed) * int64(s.elemsize))
		mgcsweep.go#L858: 	print("runtime: marked free object in span ", s, ", elemsize=", s.elemsize, " freeindex=", s.freeindex, " (bad use of unsafe.Pointer? try -d=checkptr)\n")
		mgcsweep.go#L862: 		addr := s.base() + i*s.elemsize
		mgcsweep.go#L881: 			length := s.elemsize
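
The sweeper combines the two idioms sketched earlier: it visits every slot at base + i*elemsize looking for newly dead objects (#L617, #L862), then reports the reclaimed bytes as a single nfreed * elemsize multiply (#L677, #L763). A compact sketch, with a plain bool slice standing in for the span's mark bits:

	package main

	import "fmt"

	type span struct {
		start    uintptr
		elemsize uintptr
		nelems   uintptr
		marked   []bool // stand-in for the span's gcmarkBits
	}

	// sweep visits each slot the way mgcsweep.go does and returns
	// reclaimed bytes as nfreed * elemsize: one multiply, no summing.
	func (s *span) sweep(onFree func(addr uintptr)) (reclaimed uintptr) {
		var nfreed uintptr
		for i := uintptr(0); i < s.nelems; i++ {
			if !s.marked[i] {
				onFree(s.start + i*s.elemsize) // mgcsweep.go#L617 pattern
				nfreed++
			}
		}
		return nfreed * s.elemsize // mgcsweep.go#L677/#L763 pattern
	}

	func main() {
		s := &span{start: 0x1000, elemsize: 64, nelems: 4,
			marked: []bool{true, false, true, false}}
		n := s.sweep(func(addr uintptr) { fmt.Printf("free object at %#x\n", addr) })
		fmt.Println("reclaimed", n, "bytes")
	}
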
		mheap.go#L483: 	elemsize              uintptr       // computed from sizeclass or from npages
		mheap.go#L497: 	size = s.elemsize
		mheap.go#L1400: 			s.elemsize = nbytes
		mheap.go#L1404: 			s.elemsize = uintptr(class_to_size[sizeclass])
		mheap.go#L1405: 			if !s.spanclass.noscan() && heapBitsInSpan(s.elemsize) {
		mheap.go#L1407: 				s.nelems = uint16((nbytes - (nbytes / goarch.PtrSize / 8)) / s.elemsize)
		mheap.go#L1409: 				s.nelems = uint16(nbytes / s.elemsize)
		mheap.go#L1706: 	span.elemsize = 0
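
mheap.go#L1400-L1409 is where the field earns its doc comment at #L483, "computed from sizeclass or from npages": a large span (sizeclass 0) holds a single object covering all its bytes, while a small span holds nbytes/elemsize objects, minus the tail reserved for inline heap bits on scannable spans. A sketch of that initialization; the size-class table here is a short example, not the real class_to_size:

	package main

	import "fmt"

	const (
		pageSize = 8192
		ptrSize  = 8
	)

	// Example size-class table; the real class_to_size is much longer.
	var classToSize = []uintptr{0, 8, 16, 24, 32, 48, 64, 80, 96}

	type span struct {
		npages   uintptr
		elemsize uintptr
		nelems   uint16
	}

	// init mirrors mheap.go#L1400-L1409. sizeclass 0 means a large
	// span; heapBits says whether pointer bits live inline at its end.
	func (s *span) init(sizeclass int, heapBits bool) {
		nbytes := s.npages * pageSize
		if sizeclass == 0 {
			s.elemsize = nbytes // one large object fills the span
			s.nelems = 1
			return
		}
		s.elemsize = classToSize[sizeclass]
		if heapBits {
			// Reserve 1 bit per pointer-sized word for inline heap bits.
			s.nelems = uint16((nbytes - nbytes/ptrSize/8) / s.elemsize)
		} else {
			s.nelems = uint16(nbytes / s.elemsize)
		}
	}

	func main() {
		s := &span{npages: 1}
		s.init(5, true) // 48-byte class with inline heap bits
		fmt.Println("elemsize:", s.elemsize, "nelems:", s.nelems)
	}
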
		mwbbuf.go#L259: 			gcw.bytesMarked += uint64(span.elemsize)
		pinner.go#L183: 				offset := objIndex * span.elemsize
		pinner.go#L197: 					offset := objIndex * span.elemsize
		pinner.go#L370: 	offset := objIndex * span.elemsize
		stack.go#L210: 		s.elemsize = fixedStack << order
		stack.go#L211: 		for i := uintptr(0); i < _StackCacheSize; i += s.elemsize {
		stack.go#L413: 			s.elemsize = uintptr(n)
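
stack.go reuses elemsize for stack spans: a cached span's element size is the fixed stack size shifted left by its order (#L210), and the cache is carved into chunks of that size (#L211); #L413 sets it directly for large stacks. A sketch with typical constants (2KB fixed stacks and a 32KB cache; both vary by platform and build mode):

	package main

	import "fmt"

	const (
		fixedStack     = 2 << 10  // assumed smallest stack size (2KB)
		stackCacheSize = 32 << 10 // assumed free-stack cache size (32KB)
	)

	type span struct {
		start    uintptr
		elemsize uintptr
	}

	// carve mirrors stack.go#L210-L211: an order-n stack span holds
	// (fixedStack<<order)-byte stacks, laid out in steps of elemsize.
	func (s *span) carve(order int) (stacks []uintptr) {
		s.elemsize = fixedStack << order
		for i := uintptr(0); i < stackCacheSize; i += s.elemsize {
			stacks = append(stacks, s.start+i)
		}
		return stacks
	}

	func main() {
		s := &span{start: 0x1000}
		fmt.Println("order 2 yields", len(s.carve(2)), "stacks of", s.elemsize, "bytes")
	}
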
		traceallocfree.go#L69: 				x := s.base() + i*s.elemsize